miχpods: MOM6
Contents
miχpods: MOM6#
baseline KPP missing
bootstrap error on mean, median?
Try daily vs hourly
fix EUC max at 110
figure out time base
shear spectra
check eps vs Ri histogram
I think my Ri is too low?
Notes:#
Diagnostic notes/improvements#
match vertical resolution and extent
restrict TAO S2 to ADCP depth range
restrict to top 200m.
Filtering out MLD makes a big difference!
Since we’re working with derivatives, does the vertical grid matter (as long as it is coarser than observations)?
1 difvho ocean_vertical_heat_diffusivity
2 difvso ocean_vertical_salt_diffusivity
Future model simulations#
Need lateral / neutral terms for total χ, ε
Add Atlantic TAO moorings + LOTUS, PAPA, others used in LMD94
Fix restart issue
Uncertainties#
Why can’t I reproduce the ε figure?
Why can’t I save the new station data
Need to consider Ri smoothing in models: From Gustavo:
You mentioned some kind of inconsistency between your diagnostics and what MOM6 was doing for the interior shear mixing scheme. I am working to implement the option to apply vertical smoothing in Ri multiple times, instead of only once which is what we are doing right now. I noticed that the diagnostic
ri_grad_shear is saved before the smoothing is applied. There is another diagnostic (ri_grad_shear_smooth) that saves the smoothed Ri. Perhaps you were looking at ri_grad_shear instead of ri_grad_shear_smooth and this can explain the inconsistency.
Diagnostics Effort notes#
go through
prepare; pass in dataset and optional xgcm.Grid; different pre-processing steps for different datasets
order, sign of z coordinate is painful; here normalizing
composing with matplotlib subfigures
References#
Warner & Moum (2019)
Setup#
%load_ext watermark
import datetime
import glob
import os
import warnings
import cf_xarray as cfxr
import dask
import dask_jobqueue
import dcpy
import distributed
import flox.xarray
import holoviews as hv
import hvplot.xarray
import matplotlib as mpl
import matplotlib.pyplot as plt
import mom6_tools
import numpy as np
import pandas as pd
import xarray as xr
import xgcm
from holoviews import opts
%aimport pump
from pump import mixpods
# Notebook-wide display and plotting configuration.
hv.notebook_extension("bokeh")  # holoviews with the bokeh backend
cfxr.set_options(warn_on_missing_variables=False)  # quiet cf-xarray lookups
plt.style.use("bmh")
plt.rcParams["figure.dpi"] = 140
xr.set_options(keep_attrs=True, display_expand_data=False)
# MITgcm output directory; station files live in the same tree.
gcmdir = "/glade/campaign/cgd/oce/people/bachman/TPOS_1_20_20_year/OUTPUT/"  # MITgcm output directory
stationdirname = gcmdir

# Mixing-layer-depth criteria: criterion name -> variable-name pattern.
# The '|'-separated alternatives cover MITgcm KPP (KPPhbl), MOM6 KPP
# (KPP_OBLdepth), and MOM6 ePBL (ePBL_h_ML) naming conventions.
mixing_layer_depth_criteria = {
    "boundary_layer_depth": {"name": "KPPhbl|KPP_OBLdepth|ePBL_h_ML"},
}

# MOM6 run catalog: short key -> (plot label, case/folder name).
catalog = {
    "baseline": (
        "baseline",
        "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods",
    ),
    "epbl": ("ePBL", "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.epbl.001.mixpods"),
    "kpp.lmd.002": (
        "KPP Ri0=0.5",
        "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.kpp.lmd.002.mixpods",
    ),
    "kpp.lmd.003": (
        # fixed: stray trailing comma removed from the label so it renders
        # consistently with the other entries in legends/titles.
        "KPP Ri0=0.5, Ric=0.2",
        "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.kpp.lmd.003.mixpods",
    ),
    "kpp.lmd.004": (
        "KPP ν0=2.5, Ric=0.2, Ri0=0.5",
        "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.kpp.lmd.004.mixpods",
    ),
}
%watermark -iv
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:20: FutureWarning: tmpfile is deprecated and will be removed in a future release. Please use dask.utils.tmpfile instead.
from distributed.utils import tmpfile
dask_jobqueue: 0.7.3
matplotlib : 3.6.2
json : 2.0.9
pump : 0.1
xarray : 2022.12.0
mom6_tools : 0.0.post244
holoviews : 1.15.2
hvplot : 0.8.2
cf_xarray : 0.7.6
dask : 2022.11.1
sys : 3.10.8 | packaged by conda-forge | (main, Nov 22 2022, 08:26:04) [GCC 10.4.0]
flox : 0.6.5
xgcm : 0.6.1
pandas : 1.5.2
numpy : 1.23.5
dcpy : 0.1.dev385+g121534c
distributed : 2022.11.1
# Tear down any client/cluster left over from a previous execution of this
# cell so repeated runs don't leak PBS jobs or dangling schedulers.
if "client" in locals():
    client.close()
    del client
if "cluster" in locals():
    cluster.close()

# Adaptive dask cluster on Casper via PBS (scales between 2 and 12 jobs).
cluster = dask_jobqueue.PBSCluster(
    cores=2,  # The number of cores you want
    memory="23GB",  # Amount of memory
    processes=1,  # How many processes
    queue="casper",  # The type of queue to utilize (/glade/u/apps/dav/opt/usr/bin/execcasper)
    # log_directory="/glade/scratch/dcherian/dask/",  # Use your local directory
    resource_spec="select=1:ncpus=1:mem=23GB",  # Specify resources
    project="ncgd0011",  # Input your project ID here
    walltime="02:00:00",  # Amount of wall time
    interface="ib0",  # Interface to use
)
cluster.adapt(maximum_jobs=12, minimum_jobs=2)
client = distributed.Client(cluster)
client  # display the client repr (notebook cell output)
Client
Client-2cde185b-78c9-11ed-b93c-3cecef1aca66
| Connection method: Cluster object | Cluster type: dask_jobqueue.PBSCluster |
| Dashboard: https://jupyterhub.hpc.ucar.edu/stable/user/dcherian/proxy/8787/status |
Cluster Info
PBSCluster
cda56f23
| Dashboard: https://jupyterhub.hpc.ucar.edu/stable/user/dcherian/proxy/8787/status | Workers: 0 |
| Total threads: 0 | Total memory: 0 B |
Scheduler Info
Scheduler
Scheduler-eae8841c-555c-49d8-a550-3ebfcdfee73a
| Comm: tcp://10.12.206.39:38934 | Workers: 0 |
| Dashboard: https://jupyterhub.hpc.ucar.edu/stable/user/dcherian/proxy/8787/status | Total threads: 0 |
| Started: Just now | Total memory: 0 B |
Workers
Read data#
TAO#
# Precomputed seasonal Ri percentiles from hourly TAO data.
tao_Ri = xr.load_dataarray(
    "tao-hourly-Ri-seasonal-percentiles.nc"
).cf.guess_coord_axis()

# Gridded TAO mooring dataset at 140°W from 1996 onwards.
tao_gridded = xr.open_dataset(
    os.path.expanduser("~/work/pump/zarrs/tao-gridded-ancillary.zarr"),
    chunks="auto",
    engine="zarr",
).sel(longitude=-140, time=slice("1996", None))
tao_gridded["depth"].attrs["axis"] = "Z"  # mark depth as the Z axis for cf-xarray
tao_gridded["shallowest"].attrs.clear()

# Vertical gradients of temperature and salinity along the Z axis.
tao_gridded["Tz"] = tao_gridded.T.cf.differentiate("Z")
tao_gridded.Tz.attrs = {"long_name": "$T_z$", "units": "℃/m"}
tao_gridded["Sz"] = tao_gridded.S.cf.differentiate("Z")
tao_gridded.Sz.attrs = {"long_name": "$S_z$", "units": "g/kg/m"}

# χpod microstructure record at 0, 140W, aligned onto the TAO time grid.
chipod = (
    dcpy.oceans.read_cchdo_chipod_file(
        "~/datasets/microstructure/osu/chipods_0_140W.nc"
    )
    .sel(time=slice("2015"))
    # move from time on the half hour to on the hour
    .coarsen(time=2, boundary="trim")
    .mean()
    # "Only χpods between 29 and 69 m are used in this analysis as
    # deeper χpods are more strongly influenced by the variability of zEUC than by surface forcing."
    # - Warner and Moum (2019)
    .sel(depth=slice(29, 69))
    # align χpod times to the TAO grid: nearest match within 5 minutes
    .reindex(time=tao_gridded.time, method="nearest", tolerance="5min")
    .pipe(mixpods.normalize_z, sort=True)
)

# Merge the χpod turbulence variables into the gridded TAO dataset, keeping
# the χpod depth axis separate as "depthchi".
tao_gridded = tao_gridded.update(
    chipod[["chi", "KT", "eps", "Jq"]]
    .reset_coords(drop=True)
    .rename({"depth": "depthchi"})
)
tao_gridded = mixpods.prepare(tao_gridded, oni=pump.obs.process_oni())
tao_gridded["Jq"].attrs["standard_name"] = "ocean_vertical_diffusive_heat_flux"
# tao_gridded = tao_gridded.update(
#     mixpods.pdf_N2S2(tao_gridded.drop_vars(["shallowest", "zeuc"]))
# )
# tao_gridded = mixpods.load(tao_gridded)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/dataset.py:248: UserWarning: The specified Dask chunks separate the stored chunks along dimension "depth" starting at index 42. This could degrade performance. Instead, consider rechunking after loading.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/dataset.py:248: UserWarning: The specified Dask chunks separate the stored chunks along dimension "time" starting at index 199726. This could degrade performance. Instead, consider rechunking after loading.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/dataset.py:248: UserWarning: The specified Dask chunks separate the stored chunks along dimension "longitude" starting at index 2. This could degrade performance. Instead, consider rechunking after loading.
warnings.warn(
# Monthly-mean log10(ε) from the χpods, 2005-2015.
np.log10(tao_gridded.eps).sel(time=slice("2005", "2015")).resample(
    time="M"
).mean().hvplot.quadmesh(clim=(-7.5, -6))

# Zoom into one month to compare MLD estimates.
sub = tao_gridded.sel(time="2010-01")
t = sub.theta.hvplot.quadmesh(cmap="turbo_r")
# Temperature anomaly relative to the near-surface (0 to -5 m) maximum.
dt = (
    sub.theta - sub.theta.reset_coords(drop=True).cf.sel(Z=[0, -5]).cf.max("Z")
).hvplot.quadmesh(clim=(-0.15, 0.15), cmap="RdBu_r")
newmld = mixpods.get_mld_tao_theta(sub.theta.reset_coords(drop=True))
# Overlay: Δθ field + stored MLD (white) + recomputed MLD (orange).
(
    dt
    * sub.reset_coords().mldT.hvplot.line(color="w", line_width=2)
    * newmld.reset_coords(drop=True).hvplot.line(color="orange", line_width=1)
).opts(frame_width=1200)

# 5-day means of stored vs recomputed MLD over the whole record.
(
    tao_gridded.reset_coords().mldT.resample(time="5D").mean().hvplot.line()
    * mixpods.get_mld_tao_theta((tao_gridded.reset_coords().theta))
    .resample(time="5D")
    .mean()
    .hvplot.line()
)

# Zonal velocity section with the EUC-maximum depth on top.
tao_gridded.u.cf.plot()
tao_gridded.eucmax.plot()
[<matplotlib.lines.Line2D at 0x2b38c6a4a8c0>]
MITgcm stations#
# Read 1/20° MITgcm station output and select the stations nearest four
# equatorial TAO-mooring longitudes.
stations = pump.model.read_stations_20(stationdirname)
gcmeq = stations.sel(
    longitude=[-155.025, -140.025, -125.025, -110.025], method="nearest"
)
# enso = pump.obs.make_enso_mask()
# mitgcm["enso"] = enso.reindex(time=mitgcm.time.data, method="nearest")
# gcmeq["eucmax"] = pump.calc.get_euc_max(gcmeq.u)
# pump.calc.calc_reduced_shear(gcmeq)
# oni = pump.obs.process_oni()
# gcmeq["enso_transition"] = mixpods.make_enso_transition_mask(oni).reindex(time=gcmeq.time.data, method="nearest")

# Work with the 140°W station; normalize the vertical coordinate so models
# and observations share the same depth sign/ordering convention.
mitgcm = gcmeq.sel(longitude=-140.025, method="nearest")
metrics = mixpods.normalize_z(pump.model.read_metrics(stationdirname), sort=True)
mitgcm = mixpods.normalize_z(mitgcm, sort=True)

# xgcm grid for vertical operations: cell centers on "depth", interfaces
# on "RF"; drF/drC supply the Z metrics.
mitgcm_grid = xgcm.Grid(
    metrics.sel(latitude=mitgcm.latitude, longitude=mitgcm.longitude, method="nearest"),
    coords=({"Z": {"center": "depth", "outer": "RF"}, "Y": {"center": "latitude"}}),
    metrics={"Z": ("drF", "drC")},
    periodic=False,
    boundary="fill",
    fill_value=np.nan,
)

# Attach CF standard names so cf-xarray can locate these variables.
mitgcm.theta.attrs["standard_name"] = "sea_water_potential_temperature"
mitgcm.salt.attrs["standard_name"] = "sea_water_salinity"
mitgcm["KPPviscAz"].attrs["standard_name"] = "ocean_vertical_viscosity"
mitgcm["KPPdiffKzT"].attrs["standard_name"] = "ocean_vertical_heat_diffusivity"
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/indexing.py:1374: PerformanceWarning: Slicing is producing a large chunk. To accept the large
chunk and silence this warning, set the option
>>> with dask.config.set(**{'array.slicing.split_large_chunks': False}):
... array[indexer]
To avoid creating the large chunks, set the option
>>> with dask.config.set(**{'array.slicing.split_large_chunks': True}):
... array[indexer]
return self.array[key]
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/indexing.py:1374: PerformanceWarning: Slicing is producing a large chunk. To accept the large
chunk and silence this warning, set the option
>>> with dask.config.set(**{'array.slicing.split_large_chunks': False}):
... array[indexer]
To avoid creating the large chunks, set the option
>>> with dask.config.set(**{'array.slicing.split_large_chunks': True}):
... array[indexer]
return self.array[key]
# Derive diagnostics via mixpods.prepare, then restrict to the equator and
# the top 250 m.
mitgcm = (
    mixpods.prepare(mitgcm, grid=mitgcm_grid, oni=pump.obs.process_oni())
    .sel(latitude=0, method="nearest")
    .cf.sel(Z=slice(-250, 0))
)
mitgcm_grid  # display the grid repr (notebook cell output)
<xgcm.Grid>
Z Axis (not periodic, boundary='fill'):
* center depth --> outer
* outer RF --> center
Y Axis (not periodic, boundary='fill'):
* center latitude
# Quick-look plots: zonal velocity section, MLD, and EUC-maximum depth.
mitgcm.u.cf.plot()
mitgcm.mldT.reset_coords(drop=True).cf.plot()
mitgcm.eucmax.reset_coords(drop=True).cf.plot()
[<matplotlib.lines.Line2D at 0x2ae9f91ebdc0>]
# Joint N²-S² PDF for times outside any ENSO transition phase.
mixpods.plot_n2s2pdf(mitgcm.n2s2pdf.sel(enso_transition_phase="none"))
<matplotlib.contour.QuadContourSet at 0x2ae9b69f4ac0>
MOM6#
calculate ONI#
# Compare observed ONI against the MOM6-derived ONI.
# NOTE(review): oniobs / onimom6 are defined in a cell not shown here —
# presumably observed and MOM6 ONI time series; confirm upstream.
(
    oniobs.hvplot.line(x="time", label="obs")
    * onimom6.hvplot.line(x="time", label="MOM6")
).opts(ylabel="ONI [°C]")

# Overlay the two ENSO-transition masks (obs default color, MOM6 in red).
oniobs.enso_transition_mask.plot()
onimom6.enso_transition_mask.plot(color="r")
[<matplotlib.lines.Line2D at 0x2b6d7fd20eb0>]
MOM6 sections#
Combine sections#
# Convert hourly TAO-mooring section output to consolidated zarr stores,
# one case at a time (same calls and order as before; `casename` is left
# bound to the last case, matching the original cell).
for casename in (
    "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods",
    "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.epbl.001.mixpods",
):
    mixpods.mom6_sections_to_zarr(casename)
100%|██████████| 31/31 [01:50<00:00, 3.57s/it]
/glade/u/home/dcherian/pump/pump/mixpods.py:887: RuntimeWarning: Converting a CFTimeIndex with dates from a non-standard calendar, 'noleap', to a pandas.DatetimeIndex, which uses dates from the standard calendar. This may lead to subtle errors in operations that depend on the length of time between dates.
mom6tao["time"] = mom6tao.indexes["time"].to_datetimeindex()
# Same zarr conversion for the two KPP-variant runs.
casename = "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.kpp.lmd.002.mixpods"
mixpods.mom6_sections_to_zarr(casename)
casename = "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.kpp.lmd.003.mixpods"
mixpods.mom6_sections_to_zarr(casename)
100%|██████████| 38/38 [02:07<00:00, 3.36s/it]
/glade/u/home/dcherian/pump/pump/mixpods.py:900: RuntimeWarning: Converting a CFTimeIndex with dates from a non-standard calendar, 'noleap', to a pandas.DatetimeIndex, which uses dates from the standard calendar. This may lead to subtle errors in operations that depend on the length of time between dates.
mom6tao["time"] = mom6tao.indexes["time"].to_datetimeindex()
# Convert the kpp.lmd.004 run; note `casename` keeps this value for later
# cells.
casename = "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.kpp.lmd.004.mixpods"
mixpods.mom6_sections_to_zarr(casename)
100%|██████████| 38/38 [02:06<00:00, 3.32s/it]
/glade/u/home/dcherian/pump/pump/mixpods.py:900: RuntimeWarning: Converting a CFTimeIndex with dates from a non-standard calendar, 'noleap', to a pandas.DatetimeIndex, which uses dates from the standard calendar. This may lead to subtle errors in operations that depend on the length of time between dates.
mom6tao["time"] = mom6tao.indexes["time"].to_datetimeindex()
Read sections#
# Route dask computations through the distributed client started above.
dask.config.set(scheduler=client)
m = mixpods.read_mom6_sections(
    "gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods"
)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:255: FutureWarning: job_extra has been renamed to job_extra_directives. You are still using it (even if only set to []; please also check config files). If you did not set job_extra_directives yet, job_extra will be respected for now, but it will be removed in a future release. If you already set job_extra_directives, job_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/core.py:274: FutureWarning: env_extra has been renamed to job_script_prologue. You are still using it (even if only set to []; please also check config files). If you did not set job_script_prologue yet, env_extra will be respected for now, but it will be removed in a future release. If you already set job_script_prologue, env_extra is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask_jobqueue/pbs.py:82: FutureWarning: project has been renamed to account as this kwarg was used wit -A option. You are still using it (please also check config files). If you did not set account yet, project will be respected for now, but it will be removed in a future release. If you already set account, project is ignored and you can remove it.
warnings.warn(warn, FutureWarning)
100%|██████████| 21/21 [02:14<00:00, 6.42s/it]
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/times.py:360: FutureWarning: Index.ravel returning ndarray is deprecated; in a future version this will return a view on self.
sample = dates.ravel()[0]
/glade/u/home/dcherian/pump/pump/mixpods.py:847: RuntimeWarning: Converting a CFTimeIndex with dates from a non-standard calendar, 'noleap', to a pandas.DatetimeIndex, which uses dates from the standard calendar. This may lead to subtle errors in operations that depend on the length of time between dates.
mom6tao["time"] = mom6tao.indexes["time"].to_datetimeindex()
/glade/u/home/dcherian/pump/pump/mixpods.py:859: UserWarning: Kv_v not present. Assuming equal to Kv_u
warnings.warn("Kv_v not present. Assuming equal to Kv_u")
# Persist the combined baseline sections to zarr, rechunked to ~1 year of
# hourly timesteps; drop the time-averaging bookkeeping variables first.
m.drop_vars(["average_DT", "average_T1", "average_T2", "time_bnds"]).chunk(
    {"time": 365 * 24}
).to_zarr(
    "/glade/scratch/dcherian/archive/gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods/ocn/moorings/tao.zarr",
    mode="w",
)

# Equatorial time series: ePBL boundary-layer depth vs. mlotst MLD.
m.sel(yh=0, method="nearest")[["ePBL_h_ML", "mlotst"]].to_array().hvplot.line(
    by="variable", x="time"
)

# Equatorial diffusivity profiles, browsable in time on a log y-axis.
m.sel(yh=0, method="nearest")[["Kd_heat", "Kd_ePBL"]].to_array().hvplot.line(
    by="variable", groupby="time", logy=True, ylim=(1e-6, 1e-1), xlim=(0, 500)
)
# NOTE(review): `casename` here is whatever was assigned last above (the
# kpp.lmd.004 case), not the baseline case — confirm this is intended.
mom6140 = mixpods.load_mom6_sections(casename).load()
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/cf_xarray/accessor.py:1638: UserWarning: Variables {'areacello'} not found in object but are referred to in the CF attributes.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
# Quick-look plots for the loaded MOM6 sections: zonal velocity, EUC depth,
# MLD, and the N²-S² PDF outside ENSO transition phases.
mom6140.uo.cf.plot(robust=True)
mom6140.eucmax.cf.plot()
mom6140.mldT.cf.plot(lw=0.5, color="k")
mixpods.plot_n2s2pdf(mom6140.n2s2pdf.sel(enso_transition_phase="none"))
<matplotlib.contour.QuadContourSet at 0x2acdb4c75e10>
Pick simulations#
import warnings

# Reference observations plus every model run in the catalog.
datasets = {
    "TAO": tao_gridded,
    # "MITgcm": mitgcm,
}
# Load each cataloged MOM6 run, tagging it with its long plot label;
# silence the UserWarnings these loads emit.
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=UserWarning)
    for short_name, (long_name, folder) in catalog.items():
        datasets[short_name] = mixpods.load_mom6_sections(folder).assign_attrs(
            {"title": long_name}
        )
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/cftime_offsets.py:1134: FutureWarning: Argument `closed` is deprecated in favor of `inclusive`.
return pd.date_range(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/cftime_offsets.py:1134: FutureWarning: Argument `closed` is deprecated in favor of `inclusive`.
return pd.date_range(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/cftime_offsets.py:1134: FutureWarning: Argument `closed` is deprecated in favor of `inclusive`.
return pd.date_range(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/cftime_offsets.py:1134: FutureWarning: Argument `closed` is deprecated in favor of `inclusive`.
return pd.date_range(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/cftime_offsets.py:1134: FutureWarning: Argument `closed` is deprecated in favor of `inclusive`.
return pd.date_range(
from datatree import DataTree

# Assemble all datasets into a DataTree over the common 2000-2017 window.
tree = DataTree.from_dict(
    {k: v.sel(time=slice("2000", "2017")) for k, v in datasets.items()}
)

# TAO coverage in the top 120 m: per-timestep count of valid S² and N²T
# samples (the smaller of the two), used below to mask models to observed
# coverage.
ref = tree["TAO"].ds.reset_coords(drop=True).cf.sel(Z=slice(-120, None))
counts = np.minimum(ref["S2"].cf.count("Z"), ref["N2T"].cf.count("Z")).load()
def calc_histograms(ds):
    """Mask a dataset to times with adequate TAO coverage, then attach N²/S² PDFs.

    Uses the module-level `counts` (valid TAO samples per timestep) to build
    a boolean mask, blanks out poorly-observed times, and merges in the
    histograms from ``mixpods.pdf_N2S2``.
    """
    out = ds.copy()
    # True when TAO has more than five valid 5-m bins at that timestep.
    mask = counts.reindex(time=out.time, method="nearest") > 5
    mask.attrs = {
        "description": "True when there are more than 5 5-m T, u, v in TAO dataset"
    }
    out["tao_mask"] = mask
    out = out.where(out.tao_mask)
    return out.update(mixpods.pdf_N2S2(out))
# Apply the masking + histogram computation to every node, then persist the
# tree's dask collections on the cluster.
tree = tree.map_over_subtree(calc_histograms)
tree = mixpods.persist_tree(tree)
Verify depth is normalized#
# Plot each dataset's vertical coordinate to verify depths share the same
# sign convention and ordering after normalization.
for name, ds in datasets.items():
    (ds.cf["sea_water_x_velocity"].cf["Z"].plot(marker=".", ls="none", label=name))
plt.legend()
<matplotlib.legend.Legend at 0x2aed634274c0>
# Interface spacing of the baseline run's vertical grid.
# NOTE(review): `baseline` is not defined in the cells shown — presumably
# datasets["baseline"] or similar; confirm.
baseline.zi.diff("zi").plot(marker="x")
[<matplotlib.lines.Line2D at 0x2aad0cefa1d0>]
Compare EUC maximum and MLD#
Monthly climatology#
import tqdm

# Eagerly load MLD for every child node (with a progress bar) so the
# climatology below does not recompute it lazily per plot.
for node in tqdm.tqdm(tree.children):
    tree[node]["mldT"] = tree[node]["mldT"].load()
100%|██████████| 4/4 [00:52<00:00, 13.10s/it]
def to_dataset(tree):
    """Stack the datasets of a DataTree's children along a new 'node' dim.

    The 'node' coordinate is labeled with the child names, in insertion
    order.
    """
    children = tree.children
    names = list(children.keys())
    stacked = xr.concat([child.ds for child in children.values()], dim="node")
    stacked["node"] = names
    return stacked
# Monthly climatology of MLD and EUC-maximum depth for every node.
clim = to_dataset(
    tree.map_over_subtree(
        lambda x: x.reset_coords()[["mldT", "eucmax"]].groupby("time.month").mean()
    )
).load()
clim  # display (notebook cell output)
<xarray.Dataset>
Dimensions: (node: 4, month: 12)
Coordinates:
* month (month) int64 1 2 3 4 5 6 7 8 9 10 11 12
* node (node) <U9 'TAO' 'MITgcm' 'MOM6 KPP' 'MOM6 ePBL'
Data variables:
mldT (node, month) float64 -28.23 -18.01 -14.84 ... -25.31 -32.05 -33.92
eucmax (node, month) float64 -108.0 -104.2 -95.85 ... -119.1 -120.1 -120.9
hv.Layout(
[
hv.Overlay(
[
tree[node]
.ds["mldT"]
.reset_coords(drop=True)
.groupby("time.month")[month]
.hvplot.hist(label=node, legend=True)
.opts(frame_width=150)
for node in tree.children
]
).opts(title=str(month))
for month in np.arange(1, 13)
]
).cols(4)
# Climatology line plots by node, and a model-vs-obs MLD timeseries panel.
(clim.mldT.hvplot(by="node") + clim.eucmax.hvplot(by="node")).cols(1)
mixpods.plot_timeseries(tree, "mldT", obs="TAO")